# example grids shipped with the OpenRepGrid package (for reference):
# bell2010
# mackay1992
# fbb2003
## IMPORT CLEAN PARTICIPANT-LEVEL GRIDS
## load grid (continuous constructs only) for each participant
## importExcel (OpenRepGrid package) creates an S4 repgrid object
gridType <- "short" # use "simple" or "short"; "short" has even shorter construct names

# Helper: import one participant's cleaned grid from the gridType folder.
# `pid` is the zero-padded participant id, e.g. "P04".
importParticipantGrid <- function(pid) {
  importExcel(file = paste0("data/participant_grids/", gridType, "/", pid, "_clean.xlsx"))
}

# researchers
p4  <- importParticipantGrid("P04")
p5  <- importParticipantGrid("P05")
p6  <- importParticipantGrid("P06")
p7  <- importParticipantGrid("P07")
p15 <- importParticipantGrid("P15")
# designers
p8  <- importParticipantGrid("P08")
p9  <- importParticipantGrid("P09")
p10 <- importParticipantGrid("P10")
p11 <- importParticipantGrid("P11")
p12 <- importParticipantGrid("P12")
p13 <- importParticipantGrid("P13")
p14 <- importParticipantGrid("P14")
## IMPORT RAW CODED-DATA DATAFRAME
## row = one participant construct (elements as cols)
df_raw <- read_csv(file = "data/CODED_CONSTRUCTS.csv", na = c("", "NA"))

## display labels for the twelve participants, in the same order as list_all
names <- c(
  "RESEARCHER P4", "RESEARCHER P5", "RESEARCHER P6", "RESEARCHER P7",
  "RESEARCHER P15",
  "DESIGNER P8", "DESIGNER P9", "DESIGNER P10", "DESIGNER P11",
  "DESIGNER P12", "DESIGNER P13", "DESIGNER P14"
)

## the ten stimulus visualizations (element columns in the coded data)
stimuli <- c(
  "CAREBEAR_BARS", "LADY_LINE", "BULLET_BARS", "CARTOMAP", "MAN_INFO",
  "PENGUIN_DISTS", "HISTO_DIST", "IXN_EBARS", "IXN_SLOPE", "BAYES_RIDGES"
)
## CREATE LIST OF GRIDS (researchers first, then designers)
list_all <- list(p4, p5, p6, p7, p15, p8, p9, p10, p11, p12, p13, p14)
## CREATE GROUP-LEVEL GRIDS
## repgrid `+` concatenates grids; Reduce() folds left, exactly like the
## chained p4 + p5 + ... form
g_researchers <- Reduce(`+`, list_all[1:5])
g_designers <- Reduce(`+`, list_all[6:12])
## CREATE MASTER GRID
g_all <- Reduce(`+`, list_all)
## MINIMAL CODED-DATA FRAME, ONLY CONTINUOUS (NUMERIC) CONSTRUCTS
## One row per participant construct; element ratings in CAREBEAR_BARS:BAYES_RIDGES.
df_coded <- df_raw %>%
  filter(CONSTRUCT_TYPE == "NUMERIC") %>%
  mutate(
    PID = as.factor(PID),
    SAMPLE = factor(SAMPLE),
    CONSTRUCT_TYPE = factor(CONSTRUCT_TYPE),
    POLE_LEFT = factor(POLE_LEFT),
    POLE_RIGHT = factor(POLE_RIGHT),
    POLES = paste0(POLE_LEFT, "-", POLE_RIGHT),
    FIRST = factor(FIRST),
    SECOND = factor(SECOND),
    THIRD = factor(THIRD),
    CODE_FULL = factor(CODE_STANDARD),
    CODE = factor(paste0(FIRST, "(", SECOND, ")")),
    # fix: was misspelled "RELFEXIVE", which created a duplicate misspelled
    # column and left the raw REFLEXIVE column unconverted
    REFLEXIVE = as.logical(REFLEXIVE),
    MATCH = as.logical(MATCH)) %>%
  mutate(
    # element rating columns must be numeric for correlation/PCA work
    across(CAREBEAR_BARS:BAYES_RIDGES, .fns = as.numeric)) %>%
  select(
    # drop the intermediate coding columns, keeping only CODE / CODE_FULL
    -(CODE_DH:CODE_STANDARD)
  )
## LONG DATAFRAME
## row = one participant construct X element
df_codedElements <- df_coded |>
  pivot_longer(
    cols = CAREBEAR_BARS:BAYES_RIDGES,
    names_to = "ELEMENT"
  ) |>
  mutate(
    ELEMENT = factor(ELEMENT, levels = stimuli),
    POLES = factor(POLES),
    value = as.numeric(value)
  )
## CREATE DF FOR EFA
## need constructs as columns (one column per coded construct)
df_eda <- pivot_wider(
  df_codedElements,
  names_from = CODE,
  values_from = value
)
write_csv(df_eda, file = "data/NUMERIC_CONSTRUCTS_WIDE.csv")
## LOOP all participants and render correlation heatmap on constructs
type <- "correlation_heatmaps/"
topic <- "construct_"
level <- "participant/"
for (i in seq_along(list_all)) {
  l <- list_all[[i]]
  title <- names[i]
  # construct x construct Pearson correlations (trim long construct labels);
  # `cors` (not `c`) so base::c is not masked
  cors <- constructCor(l, trim = 50, index = FALSE, method = "pearson")
  # render heatmap: lower triangle with labelled cells
  g <- ggcorrplot(cors, show.diag = TRUE, show.legend = FALSE, type = "lower",
                  hc.order = FALSE, # TRUE would reorder using hclust
                  lab = TRUE, lab_size = 2.75,
                  tl.cex = 8, tl.srt = 25
  ) + labs(title = paste0(title, " Construct Correlations"))
  print(g)
  # save plot
  ggsave(g, filename = paste0("plots/", level, type, topic, title, ".png"))
}
# cleanup loop temporaries
rm(g, cors)
## LOOP all participants and render correlation heatmap on elements
type <- "correlation_heatmaps/"
topic <- "element_"
level <- "participant/"
for (i in seq_along(list_all)) {
  l <- list_all[[i]]
  title <- names[i]
  # element x element Pearson correlations (trim long element labels);
  # `cors` (not `c`) so base::c is not masked
  cors <- elementCor(l, trim = 50, index = FALSE, method = "pearson")
  # render heatmap: lower triangle with labelled cells
  g <- ggcorrplot(cors, show.diag = TRUE, show.legend = FALSE, type = "lower",
                  hc.order = FALSE, # TRUE would reorder using hclust
                  lab = TRUE, lab_size = 2.5,
                  tl.cex = 10, tl.srt = 25
  ) + labs(title = paste0(title, " Element Correlations"))
  print(g)
  # save plot
  ggsave(g, filename = paste0("plots/", level, type, topic, title, ".png"))
}
# cleanup loop temporaries
rm(g, cors)
# PRINT CONSTRUCT CLUSTER DENDROGRAM FOR EACH PARTICIPANT
# https://docs.openrepgrid.org/articles/web/clustering.html
type <- "cluster_dendrograms/"
topic <- "construct_"
level <- "participant/"
for (i in seq_along(list_all)) {
  l <- list_all[[i]]
  title <- names[i]
  # one drawing routine shared by the file and screen renders (was duplicated)
  drawDendrogram <- function() {
    cluster(l, along = 1, # 1 = constructs, 2 = elements, 0 = both (default)
            dmethod = "euclidean",
            cmethod = "ward.D",
            align = TRUE,
            cex = 0,       # overall plot text size
            lab.cex = 0.8, # label size
            main = title,
            mar = c(2, 1, 2, 25), # wide right margin for construct labels
            trim = NA)
  }
  # render to file
  png(filename = paste0("plots/", level, type, topic, title, ".png"),
      width = 10, height = 5, units = "in", res = 300)
  drawDendrogram()
  dev.off() # close the png device (required to flush base-graphics plots)
  # render to screen
  drawDendrogram()
}
# PRINT ELEMENT CLUSTER DENDROGRAM FOR EACH PARTICIPANT
# https://docs.openrepgrid.org/articles/web/clustering.html
type <- "cluster_dendrograms/"
topic <- "element_"
level <- "participant/"
for (i in seq_along(list_all)) {
  l <- list_all[[i]]
  title <- names[i]
  # one drawing routine shared by the file and screen renders (was duplicated)
  drawDendrogram <- function() {
    cluster(l, along = 2, # 1 = constructs, 2 = elements, 0 = both (default)
            dmethod = "euclidean",
            cmethod = "ward.D",
            align = TRUE,
            cex = 0,       # overall plot text size
            lab.cex = 0.8, # label size
            main = title,
            mar = c(2, 1, 2, 25), # wide right margin for element labels
            trim = NA)
  }
  # render to file
  png(filename = paste0("plots/", level, type, topic, title, ".png"),
      width = 10, height = 4, units = "in", res = 300)
  drawDendrogram()
  dev.off() # close the png device (required to flush base-graphics plots)
  # render to screen
  drawDendrogram()
}
## PRINT SIMPLE BERTIN FOR EACH PARTICIPANT (file only)
type <- "bertin_simpleplots/"
topic <- "simpleBertin_"
level <- "participant/"
for (i in seq_along(list_all)) {
  l <- list_all[[i]]
  title <- names[i]
  png(filename = paste0("plots/", level, type, topic, title, ".png"),
      width = 12, height = 4, units = "in", res = 300)
  # https://docs.openrepgrid.org/articles/web/visualization-bertin.html
  bertin(
    l,
    along = 0,
    dmethod = "euclidean",
    cmethod = "ward.D",
    align = TRUE,
    trim = 100,
    main = paste0(title),
    colors = c("white", "black"),
    lheight = 0.8,
    id = c(TRUE, FALSE)
  )
  # overlay title in a thin strip across the top of the device
  par(fig = c(0, 1, 0.92, 1), mai = c(0, 0, 0, 0), new = TRUE)
  plot.new()
  title(main = title, adj = 0.05, line = -1, cex.main = 0.85)
  dev.off() # close the png device (base-graphics plots are not flushed otherwise)
}
# note: the simple-bertin loop above renders only to the png device, not the screen
print("ONLY SAVED TO FILE")
## [1] "ONLY SAVED TO FILE"
## RENDER CLUSTERED BERTIN (bertinCluster) TO SCREEN FOR EACH PARTICIPANT
for (i in seq_along(list_all)) {
  l <- list_all[[i]]
  title_text <- names[i]
  # reset the figure region and margins before each plot
  par(fig = c(0, 1, 0, 1), mai = c(1.2, 4.3, 1.5, 2.5), new = FALSE)
  plot.new()
  # layout segments tuned so the matrix display area is roughly square
  bertinCluster(
    l,
    along = 0,
    dmethod = "euclidean",
    cmethod = "ward.D",
    align = TRUE,
    type = "rectangle",
    cex = 1,
    lab.cex = 1,
    trim = 50,
    draw.axis = FALSE,
    main = NULL, # suppress internal title; drawn manually below
    colors = c("white", "darkred"),
    lheight = 0.75,
    id = c(TRUE, FALSE),
    xsegs = c(0.0, 0.17, 0.77, 0.91, 1), # allow a little wider middle block
    ysegs = c(0.0, 0.14, 0.8, 1.0)       # more space to the matrix vertically
  )
  # overlay title in a thin strip across the top of the device
  par(fig = c(0, 1, 0.92, 1), mai = c(0, 0, 0, 0), new = TRUE)
  plot.new()
  title(main = title_text, adj = 0.2, line = -1, cex.main = 0.85)
}
## DENDROGRAMS (ELEMENTS, CONSTRUCTS, BOTH) + CLUSTERED BERTIN FOR EACH PARTICIPANT
for (i in seq_along(list_all)) {
  l <- list_all[[i]]
  title <- names[i]
  par(fig = c(0, 1, 0, 1), mai = c(0.2, 0.1, 0.1, 0.2), new = FALSE)
  plot.new()
  # each cluster() call draws a dendrogram as a side effect and returns the
  # reordered repgrid
  clustered_elements <- cluster(l,
    along = 2, # elements
    dmethod = "euclidean", # distance measure TODO evaluate diff options
    cmethod = "ward.D",    # agglomeration method TODO evaluate diff options
    cex = 1, lab.cex = 1, main = paste0(title, " - ", "Elements")
  )
  clustered_constructs <- cluster(l,
    along = 1, # constructs
    dmethod = "euclidean",
    cmethod = "ward.D",
    cex = 1, lab.cex = 1, main = paste0(title, " - ", "Constructs")
  )
  clustered <- cluster(l,
    along = 0, # both
    dmethod = "euclidean",
    cmethod = "ward.D",
    cex = 1, lab.cex = 1, main = paste0(title, " - ", "Both")
  )
  # bertin display of the fully clustered grid
  # https://docs.openrepgrid.org/articles/web/visualization-bertin.html
  bertin(
    clustered,
    along = 0,
    dmethod = "euclidean",
    cmethod = "ward.D",
    align = TRUE,
    trim = 50,
    main = paste0(title),
    colors = c("white", "darkred"),
    lheight = 0.8,
    id = c(TRUE, FALSE)
  )
  # overlay title in a thin strip across the top of the device
  par(fig = c(0, 1, 0.92, 1), mai = c(0, 0, 0, 0), new = TRUE)
  plot.new()
  title(main = title, adj = 0.05, line = -1, cex.main = 0.85)
}
## PRINT PCA BIPLOT (file + screen) AND PCA LOADINGS FOR EACH PARTICIPANT
type <- "pca_biplots/"
topic <- "biplot2d_"
level <- "participant/"
for (i in seq_along(list_all)) {
  l <- list_all[[i]]
  title <- names[i]
  # one drawing routine shared by the file and screen renders (was duplicated);
  # https://docs.openrepgrid.org/articles/web/visualization-biplot.html
  drawBiplot <- function() {
    biplot2d(l,
             dim = c(2, 1),
             zoom = 1,
             ## constructs
             c.lines = TRUE,
             col.c.lines = gray(0.9),
             c.label.cex = 0.5,
             c.labels.inside = FALSE,
             c.label.col = "blue",
             ## elements
             e.point.col = "red",
             e.label.col = "red",
             e.label.cex = 0.5, # element label size
             ## size and margins
             mai = c(0.2, 1.5, 0.2, 1.5),
             unity = TRUE, # just makes it neater
             scale.e = 0.75 # fix: dropped trailing comma (empty argument)
    )
    # overlay the participant title on the biplot
    par(cex.main = 0.75, new = TRUE)
    title(title)
  }
  # render to file
  png(filename = paste0("plots/", level, type, topic, title, ".png"),
      width = 8, height = 8, units = "in", res = 300)
  drawBiplot()
  dev.off()
  # print rotated PCA loadings on constructs, hiding loadings below 0.3
  print(title)
  r <- constructPca(l)
  print(r, cut = 0.3)
  # render to screen
  drawBiplot()
}
## [1] "RESEARCHER P4"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC2 RC3
## not biased - biased 0.88 -0.33 0.31
## dry - sensational 0.91 0.31
## don't trust data - trust data 0.95
## don't believe author's message - believe author's message -0.89
## low audience edu level - high edu level -0.88 0.38
## stop - scroll 0.96
##
## RC1 RC2 RC3
## SS loadings 3.19 1.25 1.17
## Proportion Var 0.53 0.21 0.19
## Cumulative Var 0.53 0.74 0.93
## [1] "RESEARCHER P5"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC2 RC3
## not pro designers - pro designers 0.98
## no skin in the game - skin in the game 0.44 0.69 0.49
## informative - persuasive -0.89 0.33
## don't trust data - trust data 0.89
## don't trust author - trust author 0.94
## low author competancy - high author competancy 0.95
## general audience - niche audience 0.97
## less educated audience - more educated audience 0.95
## ignore - stop and look 0.93 0.32
##
## RC1 RC2 RC3
## SS loadings 4.53 2.44 1.50
## Proportion Var 0.50 0.27 0.17
## Cumulative Var 0.50 0.77 0.94
## [1] "RESEARCHER P6"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1
## viewer no data viz training - viewer data viz training 0.93
## target audience ignore - target audience look
## participant ignore - participant look 0.91
## author less design skills - author more design skills
## author less stats skills - author more stats skills 0.99
## author ineffective conveying message - author effective conveying message
## RC2
## viewer no data viz training - viewer data viz training
## target audience ignore - target audience look 0.96
## participant ignore - participant look
## author less design skills - author more design skills
## author less stats skills - author more stats skills
## author ineffective conveying message - author effective conveying message 0.88
## RC3
## viewer no data viz training - viewer data viz training
## target audience ignore - target audience look
## participant ignore - participant look
## author less design skills - author more design skills 0.97
## author less stats skills - author more stats skills
## author ineffective conveying message - author effective conveying message 0.30
##
## RC1 RC2 RC3
## SS loadings 2.71 1.81 1.06
## Proportion Var 0.45 0.30 0.18
## Cumulative Var 0.45 0.75 0.93
## [1] "RESEARCHER P7"
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC2 RC3
## informal - formal -0.99
## does not care about design - care about design 0.62 0.59 0.48
## does not care about data - care about data -0.98
## author low des skills - author high des skills 0.84 0.48
## author did not try - author tried 0.79 0.53
## would not change based on context - would change -0.34 -0.92
## topic not interesting - topic interesting -0.86 0.30
## vis des not interesting - vis des interesting 0.74 0.36 0.53
## time look (in sec) - time look (in sec) 0.89
##
## RC1 RC2 RC3
## SS loadings 3.93 2.87 1.70
## Proportion Var 0.44 0.32 0.19
## Cumulative Var 0.44 0.76 0.94
## [1] "RESEARCHER P15"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC2 RC3
## context-dependent - stand-alone 0.66 -0.40 0.33
## quick to make - long time to make 0.93
## sole author - team author 0.90
## author no stats background - stats phd 0.92
## story-first - data-first 0.90
## hate (rip up) - love (publish) 0.93 0.30
## not clever - clever 0.86
##
## RC1 RC2 RC3
## SS loadings 2.25 1.97 1.94
## Proportion Var 0.32 0.28 0.28
## Cumulative Var 0.32 0.60 0.88
## [1] "DESIGNER P8"
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC2 RC3
## hard to understand - easy to understand 0.31 0.83
## very ugly - very nice 0.90
## author low data-analysis skills - high data-analysis skills 0.70 0.61
## author low data-viz skills - high data-viz skills 0.94
## author low comm skills - high comm skills -0.86
## scroll - look -0.99
## low visual accessibility - high visual accessibility 0.88
## Niche audience - General audience -0.98
## Audience low data literacy - Audience high data literacy 0.90 -0.34
## informative - call-to-action 0.32
## Data collector != designer - Data collector = designer 0.85
## bad data rep - good data rep 0.88
## not trustworthy - trustworthy 0.64 0.72
##
## RC1 RC2 RC3
## SS loadings 5.40 3.50 1.78
## Proportion Var 0.42 0.27 0.14
## Cumulative Var 0.42 0.68 0.82
## [1] "DESIGNER P9"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC2 RC3
## would not look at - would look at 0.67 0.70
## niche audience - general audience 0.85 0.42
## designer less design skills - designer more design skills 0.94
## would not read contextual info - would read contextual info 0.97
##
## RC1 RC2 RC3
## SS loadings 2.09 1.10 0.74
## Proportion Var 0.52 0.28 0.19
## Cumulative Var 0.52 0.80 0.99
## [1] "DESIGNER P10"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1
## easy to understand - hard to understand -0.72
## scroll past - stop and look 0.72
## less likely to save - more likely to save 0.89
## less likely to read contextual info - more likely to read
## less likely to "like" - more likely to "like" 0.75
## author less data comm (des) skills - author more data comm (des) skills 0.90
## author less data analysis skills - author more data analysis skills
## RC2
## easy to understand - hard to understand 0.40
## scroll past - stop and look -0.46
## less likely to save - more likely to save
## less likely to read contextual info - more likely to read
## less likely to "like" - more likely to "like" -0.36
## author less data comm (des) skills - author more data comm (des) skills
## author less data analysis skills - author more data analysis skills 0.95
## RC3
## easy to understand - hard to understand
## scroll past - stop and look 0.35
## less likely to save - more likely to save
## less likely to read contextual info - more likely to read 0.95
## less likely to "like" - more likely to "like" 0.35
## author less data comm (des) skills - author more data comm (des) skills
## author less data analysis skills - author more data analysis skills
##
## RC1 RC2 RC3
## SS loadings 3.26 1.55 1.23
## Proportion Var 0.47 0.22 0.18
## Cumulative Var 0.47 0.69 0.86
## [1] "DESIGNER P11"
## Warning in cor.smooth(r): Matrix was not positive definite, smoothing was done
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC2
## Pub source (Reddit) (3 = new york post)\r\n - pub source (Jstor) -0.88 -0.33
## not legible (no idea what it's writing) - legible (can read) -0.85
## not readable (no idea about message) - readable (can understand) 0.95
## Not showcasing the data - Showcasing data -0.90
## Not tell story with data - Tell story with data (call for action) 0.87 0.35
## will not share - Share 0.65 0.75
## will not comment - Leave comment 0.47 0.82
## will not upvote - upvote 0.85 0.42
## scroll - stop and look 0.73 0.62
## Poorly communicated - Communicated 0.67
## bad design quality - good design quality 0.95
## Untrustworthy - Trustworthy -0.82 -0.55
## RC3
## Pub source (Reddit) (3 = new york post)\r\n - pub source (Jstor)
## not legible (no idea what it's writing) - legible (can read) -0.47
## not readable (no idea about message) - readable (can understand)
## Not showcasing the data - Showcasing data
## Not tell story with data - Tell story with data (call for action)
## will not share - Share
## will not comment - Leave comment
## will not upvote - upvote
## scroll - stop and look
## Poorly communicated - Communicated 0.68
## bad design quality - good design quality
## Untrustworthy - Trustworthy
##
## RC1 RC2 RC3
## SS loadings 6.39 4.00 0.95
## Proportion Var 0.53 0.33 0.08
## Cumulative Var 0.53 0.87 0.94
## [1] "DESIGNER P12"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC2
## audience low stats skills - audience high stats skills 0.97
## illustrative focus - data chart type focus -0.95
## low design quality - high design quality 0.54
## misleading - unbiased 0.83
## not much thought for chart type - thought put into chart type 0.95
## not much thought for storytelling - thought put into storytelling -0.72 -0.33
## author low stats skills - author high stats skills\r\n 0.90 0.34
## scroll on - pause 0.46
## color=decorative/nonexistent - color=visual encoding 0.70
## RC3
## audience low stats skills - audience high stats skills
## illustrative focus - data chart type focus
## low design quality - high design quality 0.78
## misleading - unbiased -0.47
## not much thought for chart type - thought put into chart type
## not much thought for storytelling - thought put into storytelling 0.52
## author low stats skills - author high stats skills\r\n
## scroll on - pause 0.87
## color=decorative/nonexistent - color=visual encoding
##
## RC1 RC2 RC3
## SS loadings 3.57 2.61 1.93
## Proportion Var 0.40 0.29 0.21
## Cumulative Var 0.40 0.69 0.90
## [1] "DESIGNER P13"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1
## less time spent designing (labor) - more time spent designing (labor) 0.39
## less thought put into designing - more thought put into designing 0.82
## less successful at conveying message - more successful at conveying message
## explanatory - call for action/behavior change 0.78
## pure information - author has intention (biased) 0.92
## scroll on - stop and look 0.67
## RC3
## less time spent designing (labor) - more time spent designing (labor) 0.89
## less thought put into designing - more thought put into designing 0.50
## less successful at conveying message - more successful at conveying message
## explanatory - call for action/behavior change 0.48
## pure information - author has intention (biased)
## scroll on - stop and look 0.73
## RC2
## less time spent designing (labor) - more time spent designing (labor)
## less thought put into designing - more thought put into designing
## less successful at conveying message - more successful at conveying message 0.98
## explanatory - call for action/behavior change 0.33
## pure information - author has intention (biased)
## scroll on - stop and look
##
## RC1 RC3 RC2
## SS loadings 2.75 1.90 1.20
## Proportion Var 0.46 0.32 0.20
## Cumulative Var 0.46 0.77 0.97
## [1] "DESIGNER P14"
##
## #################
## PCA of constructs
## #################
##
## Number of components extracted: 3
## Type of rotation: varimax
##
## Loadings:
## RC1 RC3 RC2
## low visual comm skills - high visual comm skills 0.78 0.46 0.40
## specific audience - general audience 0.84 0.38 0.36
## hard to understand - easy to understand 0.93
## scroll past - stop and look 0.32 0.90
## informative - narrative 0.62 0.75
##
## RC1 RC3 RC2
## SS loadings 1.89 1.76 1.25
## Proportion Var 0.38 0.35 0.25
## Cumulative Var 0.38 0.73 0.98
## PRINT CONSTRUCT CLUSTER DENDROGRAM for the RESEARCHER group grid
## (header previously said "PCA BIPLOT"; this section renders dendrograms)
type = "cluster_dendrograms/"
topic = "construct_"
level = "group/"
##### PRINT PLOT TO FILE
png(filename=paste0("plots/",level,type,topic,"RESEARCHERS_dendrogram",".png"),width=15, height = 12, units = "in", res = 300)
cluster(g_researchers, along = 1, #1=constructs, 2 = elements, 0 = both (default)
trim=200,
dmethod = "euclidean",#distance measure TODO evaluate diff options
cmethod="ward.D", #agglomeration method TODO evaluate diff options
align = TRUE, #align before clustering? reverses constructs if necessary to yield maximal similarity
cex = 1, lab.cex = 1, main = "RESEARCHERS")
dev.off()
## quartz_off_screen
## 2
##### PRINT PLOT TO SCREEN (same call repeated on the screen device)
cluster(g_researchers, along = 1, #1=constructs, 2 = elements, 0 = both (default)
trim=200,
dmethod = "euclidean",#distance measure TODO evaluate diff options
cmethod="ward.D", #agglomeration method TODO evaluate diff options
align = TRUE, #align before clustering? reverses constructs if necessary to yield maximal similarity
cex = 1, lab.cex = 1, main = "RESEARCHERS")
## PRINT CONSTRUCT CLUSTER DENDROGRAM for the DESIGNER group grid
## (header previously said "PCA BIPLOT"; this section renders dendrograms)
type = "cluster_dendrograms/"
topic = "construct_"
level = "group/"
####### PRINT PLOT TO FILE
png(filename=paste0("plots/",level,type,topic,"DESIGNERS_dendrogram",".png"),width=15, height = 12, units = "in", res = 300)
cluster(g_designers, along = 1, #1=constructs, 2 = elements, 0 = both (default)
trim=200,
dmethod = "euclidean",#distance measure TODO evaluate diff options
cmethod="ward.D", #agglomeration method TODO evaluate diff options
align = TRUE, #align before clustering? reverses constructs if necessary to yield maximal similarity
cex = 1, lab.cex = 1, main = "DESIGNERS")
dev.off()
## quartz_off_screen
## 2
####### PRINT PLOT TO SCREEN (same call repeated on the screen device)
cluster(g_designers, along = 1, #1=constructs, 2 = elements, 0 = both (default)
trim=200,
dmethod = "euclidean",#distance measure TODO evaluate diff options
cmethod="ward.D", #agglomeration method TODO evaluate diff options
align = TRUE, #align before clustering? reverses constructs if necessary to yield maximal similarity
cex = 1, lab.cex = 1, main = "DESIGNERS")
## PRINT ELEMENT CLUSTER DENDROGRAM for the RESEARCHER group grid
## (header previously said "PCA BIPLOT"; this section renders dendrograms)
type = "cluster_dendrograms/"
topic = "elements_"
level = "group/"
##### PRINT PLOT TO FILE
png(filename=paste0("plots/",level,type,topic,"RESEARCHERS_dendrogram",".png"),width=10, height = 4, units = "in", res = 300)
cluster(g_researchers, along = 2, #1=constructs, 2 = elements, 0 = both (default)
trim=200,
dmethod = "euclidean",#distance measure TODO evaluate diff options
cmethod="ward.D", #agglomeration method TODO evaluate diff options
align = TRUE, #align before clustering? reverses constructs if necessary to yield maximal similarity
cex = 1, lab.cex = 1, main = "RESEARCHERS")
dev.off()
## quartz_off_screen
## 2
##### PRINT PLOT TO SCREEN (outer parentheses force the returned repgrid to print)
(researcher_elements <- cluster(g_researchers, along = 2, #1=constructs, 2 = elements, 0 = both (default)
trim=200,
dmethod = "euclidean",#distance measure TODO evaluate diff options
cmethod="ward.D", #agglomeration method TODO evaluate diff options
align = TRUE, #align before clustering? reverses constructs if necessary to yield maximal similarity
cex = 1, lab.cex = 1, main = "RESEARCHERS"))
##
## META DATA:
## Number of constructs: 37
## Number of elements: 10
##
## SCALE INFO:
## The grid is rated on a scale from 1 (left pole) to 5 (right pole)
##
## RATINGS:
## CARTOMAP - 5 6 - PENGUIN_DISTS
## IXN_SLOPE - 4 | | 7 - CAREBEAR_BARS
## IXN_EBARS - 3 | | | | 8 - TAXMAN_INFO
## HISTO_DIST - 2 | | | | | | 9 - LADY_LINE
## BAYES_RIDGES - 1 | | | | | | | | 10 - BULLET_BARS
## | | | | | | | | | |
## in in the game (1) 2.5 5 3.5 3.5 1 1 5 5 1.5 4 (1) no skin in t
## ot interesting (2) 4 4 4 4 1 1 5 3 3 1 (2) topic intere
## stop and look (3) 5 5 5 5 1 2.5 4.5 5 2 4 (3) ignore
## pro designers (4) 4.5 4.5 4.5 4.5 1 3.5 4 3 1.5 3 (4) not pro desi
## look (in sec) (5) 3 5 5 5 2 3 3 4 3 2 (5) time look (i
## es interesting (6) 5 5 5 5 1 2 2 4 1 1 (6) vis des not
## igh des skills (7) 4 5 4 5 1 1 3 4 1 1 (7) author low d
## g time to make (8) 3 5 4 5 1.5 1.5 5 1 1 3 (8) quick to mak
## design skills (9) 2 5 3 4 3 2 4 2 1 3 (9) author less
## team author (10) 5 5 5 5 2 4 5 1 1 2 (10) sole author
## e conveying m (11) 4 2 3 4 2 3 4 3 5 3 (11) author effe
## dience ignore (12) 5 3 5 4 3 4 5 3 5 4 (12) target audi
## scroll (13) 4 5 5 5 2 2 5 4 5 5 (13) stop
## or competancy (14) 4.5 4 5 4 5 5 5 2 5 5 (14) high author
## 't trust data (15) 5 5 3 3 5 5 5 3 5 5 (15) trust data
## would change (16) 3 3 4 5 5 5 4 3 5 5 (16) would not
## about design (17) 4 5 4 4 1 3 1 3 1 1 (17) does not ca
## sensational (18) 4 5 5 5 3 3 1 2 1 1 (18) dry
## author tried (19) 3 5 4 4 1 2 2 1 1 1 (19) author did
## stand-alone (20) 5 5 5 2 3 2 1 1 1.5 1 (20) context-dep
## clever (21) 2.5 5 4 5 4.5 4 1.5 2 2 1.5 (21) not clever
## ove (publish) (22) 3 4 4 4 4 2 2 1.5 2 2 (22) hate (rip u
## biased (23) 5 5 5 5 4 5 1 4 2 2 (23) not biased
## nce edu level (24) 5 5 4 4 4 4 1 3 1 1 (24) high edu le
## story-first (25) 4.5 4.5 4.5 4.5 2 4.5 1 5 1 1.5 (25) data-first
## stats skills (26) 5 3 4 4 3 5 1 2 1 1 (26) author more
## viz training (27) 5 4 4 4 3 5 1 3 1 2 (27) viewer data
## cipant ignore (28) 5 2 4 4 4 5 3 3 2 2 (28) participant
## persuasive (29) 5 5 5 5 4 5 1 5 3 4 (29) informative
## ts background (30) 5 4 4.5 4.5 3 5 1 4 3 4 (30) stats phd
## 't trust data (31) 5 5 5 5 5 5 1 5 5 5 (31) trust data
## ated audience (32) 5 5 4 4 4.5 4 1 3 3.5 3 (32) more educat
## eral audience (33) 5 5 5 4 4 5 1 3 4 3 (33) niche audie
## trust author (34) 5 5 5 5 5 5 1 3 5 3 (34) trust autho
## re about data (35) 5 5 5 5 4 5 1 1 2 4 (35) care about
## informal (36) 5 5 5 5 4 5 1 2 3 4 (36) formal
## hor's message (37) 5 5 5 5 2 5 1 2 1 5 (37) believe aut
##
##
##
##
##
##
##
## h
## s
##
## g
## n
## i
## e
## e
##
##
## c
## e
##
##
##
## c
## r
##
## n
## e
##
## p
##
## v
##
##
##
##
##
##
##
## e
## n
## r
## d
##
## h
## PRINT CLUSTER DENDROGRAM (elements, group level) to file
# (header fixed: this section prints cluster dendrograms, not PCA biplots)
type <- "cluster_dendrograms/"
topic <- "elements_"
level <- "group/"
##### PRINT PLOTS TO FILE
# FIX: removed the plot.new() call that preceded png(). plot.new() opened a
# stray on-screen device that dev.off() never closed — the likely cause of the
# "may need to run dev.off() several times" issue noted further down. png()
# opens and manages its own device, so nothing needs to be opened beforehand.
png(filename = paste0("plots/", level, type, topic, "DESIGNERS_dendrogram", ".png"),
    width = 10, height = 4, units = "in", res = 300)
cluster(g_designers, along = 2, # 1 = constructs, 2 = elements, 0 = both (default)
        trim = 200,
        dmethod = "euclidean", # distance measure TODO evaluate diff options
        cmethod = "ward.D",    # agglomeration method TODO evaluate diff options
        align = TRUE, # align before clustering? reverses constructs if necessary for maximal similarity
        cex = 1, lab.cex = 1, main = "DESIGNERS")
dev.off()
## quartz_off_screen
## 2
##### PRINT PLOTS TO SCREEN
# Same element clustering as the file version above, drawn on the active
# device; the fitted object is kept and printed explicitly instead of relying
# on the `(x <- ...)` autoprint idiom.
designer_elements <- cluster(g_designers,
                             along = 2,   # 1 = constructs, 2 = elements, 0 = both (default)
                             trim = 200,
                             dmethod = "euclidean", # distance measure TODO evaluate diff options
                             cmethod = "ward.D",    # agglomeration method TODO evaluate diff options
                             align = TRUE, # align before clustering? reverses constructs if necessary for maximal similarity
                             cex = 1, lab.cex = 1, main = "DESIGNERS")
print(designer_elements)
##
## META DATA:
## Number of constructs: 56
## Number of elements: 10
##
## SCALE INFO:
## The grid is rated on a scale from 1 (left pole) to 5 (right pole)
##
## RATINGS:
## TAXMAN_INFO - 5 6 - CARTOMAP
## HISTO_DIST - 4 | | 7 - PENGUIN_DISTS
## IXN_SLOPE - 3 | | | | 8 - LADY_LINE
## IXN_EBARS - 2 | | | | | | 9 - CAREBEAR_BARS
## BAYES_RIDGES - 1 | | | | | | | | 10 - BULLET_BARS
## | | | | | | | | | |
## isual encoding (1) 1 1 4 5 5 1 1 5 5 4 (1) color=decorat
## likely to save (2) 5 5 5 5 5 1 1 4 4 3 (2) less likely t
## ata-viz skills (3) 1 4 3 4 2 1 1 5 2 2 (3) author low da
## very nice (4) 2 5 2 5 3 1 1 5 3 3 (4) very ugly
## design quality (5) 3 4 4 4 1 2 2 4 4 3 (5) low design qu
## likely to read (6) 5 5 3 4 1 1 3 5 2 4 (6) less likely t
## to understand (7) 4 4 2 3 2 1 4 5 1 1 (7) hard to under
## pause (8) 3 4 1 3 1 3 3 5 5 3 (8) scroll on
## accessibility (9) 4 5 1 1 2 3 3 4 3 2 (9) low visual ac
## to understand (10) 1 4 3 3 4 1 2 3 1 1 (10) hard to unde
## storytelling (11) 2 4 1 4 1 1 2 1 1 1 (11) not much tho
## t conveying m (12) 4 3 1 1 2 2 3 1 1 1 (12) less success
## to understand (13) 5 5 2 3 4 1 2 1 1 1 (13) hard to unde
## Communicated (14) 3 3 3 3 2 2 1.5 2.5 1.2 1.5 (14) Poorly commu
## would look at (15) 3 3 4 4 1 1 2 2 3 1 (15) would not lo
## eral audience (16) 3 4 4 4 3 1 2 2 2 2 (16) specific aud
## l comm skills (17) 3 4 4 4 3 1 2 2 1 2 (17) low visual c
## esign quality (18) 3.2 4 3.5 4 2.8 2.2 2 1.7 2 2.5 (18) bad design q
## comm (des) sk (19) 4 3.5 3.5 5 3 1 2.5 2 3 2 (19) author less
## to chart type (20) 1 2 5 2 1 1 1 3 4 3 (20) not much tho
## unbiased (21) 1 1 5 1 3 2 1 2 3 4 (21) misleading
## informative (22) 1 5 5 1 1 4 2 2 3 1 (22) call-to-acti
## scroll past (23) 1 1 1 1 1 4 1 5 5 5 (23) stop and loo
## ntextual info (24) 1 2 2 1 1 3 1 3 2 5 (24) would read c
## trustworthy (25) 1 2 2 2 2 1 2 5 3 4 (25) not trustwor
## alysis skills (26) 1 1 2 3 3 2 1 5 4 4 (26) author low d
## good data rep (27) 1 3 3 3 2 1 1 5 2 5 (27) bad data rep
## owcasing data (28) 1 2 1 1 2 1 1 2 4 2 (28) Not showcasi
## analysis skil (29) 1 2 2 1 2 2 1 3 4 2 (29) author less
## Trustworthy (30) 1 1 1 1 1.8 1.7 2 3 3 2 (30) Untrustworth
## l not comment (31) 1 1 1.5 1 1 1 1 3 3 2 (31) Leave commen
## le (can read) (32) 1 1 1 1 1 1 1 1 3 2 (32) not legible
## e/nonexistent (33) 5 5 2 1 1 5 5 1 1 2 (33) color=visual
## ikely to save (34) 1 1 1 1 1 5 5 2 2 3 (34) more likely
## ta-viz skills (35) 5 2 3 2 4 5 5 1 4 4 (35) high data-vi
## very ugly (36) 4 1 4 1 3 5 5 1 3 3 (36) very nice
## esign quality (37) 3 2 2 2 5 4 4 2 2 3 (37) high design
## ad contextual (38) 1 1 3 2 5 5 3 1 4 2 (38) more likely
## to understand (39) 2 2 4 3 4 5 2 1 5 5 (39) easy to unde
## scroll on (40) 3 2 5 3 5 3 3 1 1 3 (40) pause
## accessibility (41) 2 1 5 5 4 3 3 2 3 4 (41) high visual
## to understand (42) 5 2 3 3 2 5 4 3 5 5 (42) easy to unde
## for storytell (43) 4 2 5 2 5 5 4 5 5 5 (43) thought put
## to understand (44) 1 1 4 3 2 5 4 5 5 5 (44) easy to unde
## t conveying m (45) 2 3 5 5 4 4 3 5 5 5 (45) more success
## communicated (46) 3 3 3 3 4 4 4.5 3.5 4.8 4.5 (46) Communicated
## d not look at (47) 3 3 2 2 5 5 4 4 3 5 (47) would look a
## ific audience (48) 3 2 2 2 3 5 4 4 4 4 (48) general audi
## l comm skills (49) 3 2 2 2 3 5 4 4 5 4 (49) high visual
## esign quality (50) 2.8 2 2.5 2 3.2 3.8 4 4.3 4 3.5 (50) good design
## comm (des) sk (51) 2 2.5 2.5 1 3 5 3.5 4 3 4 (51) author more
## for chart typ (52) 5 4 1 4 5 5 5 3 2 3 (52) thought put
## misleading (53) 5 5 1 5 3 4 5 4 3 2 (53) unbiased
## all-to-action (54) 5 1 1 5 5 2 4 4 3 5 (54) informative
## stop and look (55) 5 5 5 5 5 2 5 1 1 1 (55) scroll past
## ntextual info (56) 5 4 4 5 5 3 5 3 4 1 (56) would not re
## MANUAL DENDROGRAM FOR ELEMENTS — DESIGNERS
# Elements sit on the columns of the rating layer, so transpose before taking
# pairwise euclidean distances, then agglomerate with Ward's method.
r <- t(getRatingLayer(g_designers)) # rows = elements after transpose
d <- dist(r, method = "euclidean")
fit.elements <- hclust(d, method = "ward.D")
dend.el_design <- as.dendrogram(fit.elements)
# plot(dend.el_design, horiz=TRUE)
## MANUAL DENDROGRAM FOR ELEMENTS — RESEARCHERS
# Same recipe as the designers' element dendrogram: transpose the rating
# layer, euclidean distances, Ward clustering, convert to a dendrogram.
r <- t(getRatingLayer(g_researchers)) # rows = elements after transpose
d <- dist(r, method = "euclidean")
fit.elements <- hclust(d, method = "ward.D")
dend.el_research <- as.dendrogram(fit.elements)
# plot(dend.el_research, horiz=TRUE)
## the tanglegram / back to back dendrograms
# l <- dendlist(dend.el_design, dend.el_research)
# tanglegram(l)
# FIX: removed the plot.new() call that preceded png(). It opened a stray
# on-screen device that dev.off() could not close, causing the intermittent
# dev.off() error noted below. png() opens its own device.
png(filename = "plots/group/COMPARE_ELEMENTS.png",
    width = 12, height = 6, units = "in", res = 300)
dendbackback(dend.el_research, dend.el_design,
             sort = TRUE,
             margin_inner = 8,
             main_left = "RESEARCHERS",
             main_right = "DESIGNERS",
             lab.cex = 1,
             highlight_distinct_edges = TRUE,
             common_subtrees_color_branches = TRUE,
             common_subtrees_color_lines = TRUE,
             main = "ELEMENTS by GROUP")
dev.off()
## quartz_off_screen
## 2
## fallback for stray devices (should no longer be needed now that plot.new()
## is not called before png()):
# while (!is.null(dev.list())) dev.off()
## PRINT TO SCREEN
# On-screen version of the back-to-back element dendrograms saved above
# (dendextend::dendbackback). The two trees are mirrored so element clusters
# can be compared across groups: distinct edges get highlighted and common
# subtrees share branch/line colors.
dendbackback(dend.el_research,dend.el_design,
sort = TRUE,
margin_inner = 8,
main_left = "RESEARCHERS",
main_right = "DESIGNERS",
lab.cex = 1,
highlight_distinct_edges = TRUE,
common_subtrees_color_branches = TRUE,
common_subtrees_color_lines = TRUE,
main="ELEMENTS by GROUP")
type <- "pca_biplots/"
topic <- ""
level <- "group/"
# FIX: removed the plot.new() call before png() — it opened a stray on-screen
# device that dev.off() never closed. png() opens its own device.
png(filename = paste0("plots/", level, type, topic, "RESEARCHERS_biplot", ".png"))
# ,width=10, height = 4, units = "in", res = 300)
## RESEARCHER BIPLOT (written to file)
title <- "RESEARCHERS"
# print(title)
biplot2d(g_researchers,
         dim = c(2, 1), # plot dimension 2 on x, dimension 1 on y
         zoom = 1,
         ## constructs
         c.lines = TRUE,
         col.c.lines = gray(0.9),
         c.label.cex = 0.5,
         c.labels.inside = FALSE,
         c.label.col = "blue",
         ## elements
         # rect.margins = c(2,2),
         e.point.col = "red",
         e.label.col = "red",
         e.label.cex = 0.5, # element label size
         ## size and margins
         mai = c(0.2, 1.5, .2, 1.5),
         unity = TRUE, # just makes it neater
         scale.e = 0.75,
)
# par(new = TRUE) lets title() draw on top of the biplot; cex.main shrinks it
op <- par(# fig = c(0,1,0.5,1),
          cex.main = 0.75, # title size
          new = TRUE)
title(title)
dev.off()
## quartz_off_screen
## 2
## RESEARCHER BIPLOT (on-screen)
# Same biplot as the file version above, but with the centering/normalization
# arguments spelled out explicitly: center = 1 row-centers constructs,
# normalize = 0 applies no normalization.
title = "RESEARCHERS"
# print(title)
biplot2d(g_researchers,
center=1, #row mean centering of constructs, 2=column (elements), 3 = double, 4 = midpoint
normalize = 0, #none, 1 = rows, 2 = columns
dim = c(2,1),
zoom = 1,
## construct s
c.lines = TRUE,
col.c.lines= gray(0.9),
c.label.cex = 0.5,
c.labels.inside = FALSE,
c.label.col = "blue",
## elements
# rect.margins = c(2,2),
e.point.col = "red",
e.label.col = "red",
e.label.cex = 0.5, #element label size
## size and margins
mai = c(0.2,1.5,.2,1.5),
unity=TRUE, #just makes it neater
scale.e = 0.75,
)
# par(new = TRUE) lets title() draw over the existing biplot on this device
op <- par(# fig = c(0,1,0.5,1),
cex.main = 0.75, #title size
new = TRUE)
title(title)
type <- "pca_biplots/"
topic <- ""
level <- "group/"
# FIX: removed the plot.new() call before png() — it opened a stray on-screen
# device that dev.off() never closed. png() opens its own device.
png(filename = paste0("plots/", level, type, topic, "DESIGNERS_biplot", ".png"))
# ,width=10, height = 4, units = "in", res = 300)
## DESIGNERS BIPLOT (written to file)
title <- "DESIGNERS"
# print(title)
biplot2d(g_designers,
         dim = c(2, 1), # plot dimension 2 on x, dimension 1 on y
         zoom = 1,
         ## constructs
         c.lines = TRUE,
         col.c.lines = gray(0.9),
         c.label.cex = 0.5,
         c.labels.inside = FALSE,
         c.label.col = "blue",
         ## elements
         # rect.margins = c(2,2),
         e.point.col = "red",
         e.label.col = "red",
         e.label.cex = 0.5, # element label size
         ## size and margins
         mai = c(0.2, 1.5, .2, 1.5),
         unity = TRUE, # just makes it neater
         scale.e = 0.75,
)
# par(new = TRUE) lets title() draw on top of the biplot; cex.main shrinks it
op <- par(# fig = c(0,1,0.5,1),
          cex.main = 0.75, # title size
          new = TRUE)
title(title)
dev.off()
## quartz_off_screen
## 2
## DESIGNERS BIPLOT (on-screen)
# FIX: the original plotted g_researchers here while titling the plot
# "DESIGNERS". It now plots g_designers, matching the saved-to-file version.
title <- "DESIGNERS"
# print(title)
biplot2d(g_designers,
         dim = c(2, 1), # plot dimension 2 on x, dimension 1 on y
         zoom = 1,
         ## constructs
         c.lines = TRUE,
         col.c.lines = gray(0.9),
         c.label.cex = 0.5,
         c.labels.inside = FALSE,
         c.label.col = "blue",
         ## elements
         # rect.margins = c(2,2),
         e.point.col = "red",
         e.label.col = "red",
         e.label.cex = 0.5, # element label size
         ## size and margins
         mai = c(0.2, 1.5, .2, 1.5),
         unity = TRUE, # just makes it neater
         scale.e = 0.75,
)
# par(new = TRUE) lets title() draw on top of the biplot; cex.main shrinks it
op <- par(# fig = c(0,1,0.5,1),
          cex.main = 0.75, # title size
          new = TRUE)
title(title)
## TABLE AT FIRST
# Cross-tabulate the top-level code (FIRST) against participant group (SAMPLE)
# as percentages of all coded rows (crosstab from qacBase).
crosstab(data = df_codedElements, rowvar = FIRST, colvar = SAMPLE, type = "percent") # or "prop.col",
## SAMPLE
## FIRST DESIGNER RESEARCHER Total
## artifact 34.41% 15.05% 49.46%
## audience 5.38% 5.38% 10.75%
## data 1.08% 3.23% 4.30%
## maker 18.28% 16.13% 34.41%
## mode 1.08% 0.00% 1.08%
## Total 60.22% 39.78% 100.00%
## DATAFRAME
# Keep only the columns used by the code-level tables below.
df <- df_codedElements %>%
select(FIRST,SECOND,THIRD,POLES,CODE_FULL,ELEMENT,SAMPLE,PID)
## TWO LEVEL TABLE
# one row per participant element X code
# Count FIRST x SECOND codes per group, spread the groups into columns, scale
# the counts by 1/10 (presumably one row per element, 10 stimuli — confirm),
# and blank repeated FIRST labels so the gt table reads as a hierarchy.
table_df <- df %>%
  count(FIRST, SECOND, SAMPLE) %>%
  pivot_wider(names_from = SAMPLE, values_from = n, values_fill = 0) %>%
  arrange(FIRST, SECOND) %>%
  group_by(FIRST) %>%
  mutate(
    FIRST = replace(FIRST, row_number() != 1, ""),
    across(c(DESIGNER, RESEARCHER), ~ .x / 10)
  ) %>%
  ungroup()
gt(table_df)
| FIRST | SECOND | DESIGNER | RESEARCHER |
|---|---|---|---|
| artifact | behavior | 14 | 5 |
| design | 4 | 5 | |
| genre | 0 | 1 | |
| intent | 5 | 1 | |
| register | 0 | 1 | |
| trust | 3 | 1 | |
| usability | 6 | 0 | |
| audience | behavior | 0 | 1 |
| skill | 2 | 3 | |
| type | 3 | 1 | |
| data | source | 1 | 0 |
| topic | 0 | 1 | |
| trust | 0 | 2 | |
| maker | dilligence | 4 | 4 |
| intent | 2 | 1 | |
| skill | 11 | 7 | |
| trust | 0 | 2 | |
| type | 0 | 1 | |
| mode | type | 1 | 0 |
## THREE LEVEL TABLE
# Like the two-level table, but broken down to the third code level. Counts
# are again divided by 10, and repeated FIRST/SECOND labels are blanked for
# display. THIRD is cast to character (presumably so NA displays consistently
# alongside the blanked labels — confirm).
table_df <- df %>%
count(FIRST, SECOND, THIRD, SAMPLE) %>%
pivot_wider(
names_from = SAMPLE,
values_from = n,
values_fill = 0
) %>%
arrange(FIRST, SECOND, THIRD) %>%
group_by(FIRST, SECOND) %>%
mutate(
THIRD = as.character(THIRD),
SECOND = if_else(row_number() == 1, SECOND, ""),
FIRST = if_else(row_number() == 1, FIRST, ""),
DESIGNER=DESIGNER/10,
RESEARCHER=RESEARCHER/10
) %>%
ungroup()
table_df %>% gt()
| FIRST | SECOND | THIRD | DESIGNER | RESEARCHER |
|---|---|---|---|---|
| artifact | behavior | commenting | 1 | 0 |
| encounter | 7 | 4 | ||
| infoseek | 2 | 0 | ||
| like/upvote | 2 | 0 | ||
| redesign | 0 | 1 | ||
| save | 1 | 0 | ||
| sharing | 1 | 0 | ||
| artifact | design | appeal | 1 | 0 |
| appropriateness | 1 | 0 | ||
| clever | 0 | 1 | ||
| duration | 0 | 1 | ||
| encoding | 1 | 0 | ||
| like | 0 | 1 | ||
| quality | 1 | 0 | ||
| salience | 0 | 1 | ||
| vibe | 0 | 1 | ||
| artifact | genre | completeness | 0 | 1 |
| artifact | intent | data | 1 | 0 |
| narrative | 1 | 0 | ||
| NA | 3 | 1 | ||
| artifact | register | NA | 0 | 1 |
| artifact | trust | NA | 3 | 1 |
| artifact | usability | accessibility | 1 | 0 |
| clarity | 1 | 0 | ||
| effort | 2 | 0 | ||
| legibility | 1 | 0 | ||
| readability | 1 | 0 | ||
| audience | behavior | encounter | 0 | 1 |
| audience | skill | dataliteracy | 1 | 1 |
| statistics | 1 | 0 | ||
| NA | 0 | 2 | ||
| audience | type | NA | 3 | 1 |
| data | source | NA | 1 | 0 |
| data | topic | interest | 0 | 1 |
| data | trust | NA | 0 | 2 |
| maker | dilligence | data | 0 | 1 |
| design | 2 | 1 | ||
| storytelling | 1 | 0 | ||
| NA | 1 | 2 | ||
| maker | intent | NA | 2 | 1 |
| maker | skill | communication | 4 | 1 |
| data | 2 | 0 | ||
| design | 3 | 3 | ||
| statistics | 1 | 2 | ||
| visualization | 1 | 0 | ||
| NA | 0 | 1 | ||
| maker | trust | NA | 0 | 2 |
| maker | type | group size | 0 | 1 |
| mode | type | NA | 1 | 0 |
## CONSTRUCT LEVEL TABLE
# Instead of counts, list the distinct construct poles for each
# FIRST/SECOND/THIRD code, comma-joined, one column per group. Repeated
# FIRST/SECOND labels are blanked so the gt table reads as a hierarchy.
table_df <- df %>%
  group_by(FIRST, SECOND, THIRD, SAMPLE) %>%
  summarise(POLES_values = paste(unique(POLES), collapse = ","), .groups = "drop") %>%
  pivot_wider(
    names_from = SAMPLE,
    values_from = POLES_values,
    values_fill = ""
  ) %>%
  arrange(FIRST, SECOND, THIRD) %>%
  group_by(FIRST, SECOND) %>%
  mutate(
    THIRD = as.character(THIRD),
    across(c(SECOND, FIRST), ~ replace(.x, row_number() != 1, ""))
  ) %>%
  ungroup()
gt(table_df)
| FIRST | SECOND | THIRD | DESIGNER | RESEARCHER |
|---|---|---|---|---|
| artifact | behavior | commenting | will not comment-Leave a comment | |
| encounter | low engagement (scroll past)-high engagement (stop and look),would not spend time looking at-would spend time looking at,scroll past-stop and look,keep scrolling-stop and look ,scroll on-pause,scroll on-stop and look | stop-scroll,ignore-stop and look,participant would not want to look-participant would want to look,how long to look at chart (in seconds)-NA | ||
| infoseek | would not read contextual information-would read more contextual information,less likely to read contextual information-more likely to read | |||
| like/upvote | less likely to “like” react-more likely to “like”,will not upvote-will upvote | |||
| redesign | would not want to change based on context-would want to change | |||
| save | less likely to save-more likely to save | |||
| sharing | will not share-Share it with someone | |||
| artifact | design | appeal | very ugly (personal preference)-very nice | |
| appropriateness | bad data representation-good data representation | |||
| clever | not clever-clever | |||
| duration | quick to make-long time to make | |||
| encoding | color as decorative or nonexistent-color as a visual encoding | |||
| like | hate (rip up)-love (publish) | |||
| quality | lower design quality-higher design quality | |||
| salience | visualization design not interesting-visualization design interesting | |||
| vibe | dry-sensational | |||
| artifact | genre | completeness | context-dependent-stand-alone | |
| artifact | intent | data | Not showcasing the data-Showcasing the data | |
| narrative | Not tell a story with data-Tell a story with data (call for action) | |||
| NA | more illustrative focus-more data chart type focus,explanatory-call for action (want to change behavior),informative-narrative | data serving story (story first)-story serving data (data first/story ad hoc) | ||
| artifact | register | NA | informal-formal | |
| artifact | trust | NA | not trustworthy-trustworthy,Untrustworthy-Trustworthy,misleading-unbiased | not biased-biased |
| artifact | usability | accessibility | low accessibility to visually impaired readers-high accessibility to visually impaired readers | |
| clarity | not clear overall (hard to understand)-clear overall (easy to understand) | |||
| effort | easier to understand-harder to understand,hard to understand-easy to understand | |||
| legibility | not legible (no idea what it’s writing)-legible (I can read it!) | |||
| readability | not readable (no idea about the message)-readable (I can understand it!) | |||
| audience | behavior | encounter | target audience would not want to look-target audience want to look at it | |
| audience | skill | dataliteracy | Audience low data literacy-Audience high data literacy | viewer no training in data visualization-viewer training in data visualization |
| statistics | audience low stats skills-audience high stats skills | |||
| NA | audience education level (low)-high edu level,less educated audience-more educated audience | |||
| audience | type | NA | Niche audience-General audience,niche audience-general audience,specific audience-general audience | general audience-niche audience |
| data | source | NA | Data collector and data designer are different people-Data collector and data designer are the same person | |
| data | topic | interest | topic not interesting-topic interesting | |
| data | trust | NA | don't trust the data-trust the data | |
| maker | dilligence | data | does not care about data-care about data | |
| design | not much thought put into chart type-thought put into chart type,less thought put into designing-more thought put into designing | does not care about design-care about design | ||
| storytelling | not much thought put into storytelling-thought put into storytelling | |||
| NA | less time spent designing (labor)-more time spent designing (labor) | no skin in the game (no thought put into making the design of the viz)-skin in the game (more thought put into making the design),author did not try-author tried | ||
| maker | intent | NA | More informative-More call-to-action, pure information-author has an intention (biased) | informative-persuasive |
| maker | skill | communication | author low communication skills-author high communication skills,Poorly communicated-Communicated,less successful at conveying message-more successful at conveying message,low visual communication skills-high visual communication skills | author not effective in conveying message-author effective in conveying message |
| data | author low data-analysis skills-high data-analysis skills,author less data analysis skills-author more data analysis skills | |||
| design | designer has less design skills-designer has more design skills,author less data communication (design) skills-author more data communication (design) skills,Design quality - bad-Design quality - Good | not professional designers-professional designers,author has less design skills-more design skills,author's design skills (low)-(high) | ||
| statistics | author low stats skills-author high stats skills | author has less statistics skills-more statistics skills,author no stats background-stats phd | ||
| visualization | author low data-viz skills-high data-viz skills | |||
| NA | low author competancy-high author competancy | |||
| maker | trust | NA | don't believe the author's message-believe author's message,don't turst the author-trust the author | |
| maker | type | group size | likelihood of made by one person (3 means unsure)-probably made by multiple people | |
| mode | type | NA | Publication source (Reddit) (3 = new york post) -publication source (Jstor) |
###### CODE LEVEL TO XLS
# Same construct-level table as above, except poles are joined with newlines
# (one pole pair per line within a cell) and the result is written to an
# Excel workbook (write.xlsx from openxlsx) instead of rendered with gt.
table_df <- df %>%
group_by(FIRST, SECOND, THIRD, SAMPLE) %>%
summarise(POLES_values = paste(unique(POLES), collapse = "\n"), .groups = "drop") %>%
pivot_wider(
names_from = SAMPLE,
values_from = POLES_values,
values_fill = ""
) %>%
arrange(FIRST, SECOND, THIRD) %>%
group_by(FIRST, SECOND) %>%
mutate(
THIRD = as.character(THIRD),
SECOND = if_else(row_number() == 1, SECOND, ""),
FIRST = if_else(row_number() == 1, FIRST, "")
) %>%
ungroup()
# table_df
# knitr::kable(table_df)
# library(gt)
# table_df %>% gt()
# NOTE(review): writes to the working directory; consider a path under data/.
write.xlsx(table_df, file = "table.xlsx", colNames=TRUE, asTable = TRUE)
sessionInfo()
## R version 4.3.2 (2023-10-31)
## Platform: x86_64-apple-darwin20 (64-bit)
## Running under: macOS Sonoma 14.7.2
##
## Matrix products: default
## BLAS: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/lib/libRblas.0.dylib
## LAPACK: /Library/Frameworks/R.framework/Versions/4.3-x86_64/Resources/lib/libRlapack.dylib; LAPACK version 3.11.0
##
## locale:
## [1] en_US.UTF-8/en_US.UTF-8/en_US.UTF-8/C/en_US.UTF-8/en_US.UTF-8
##
## time zone: America/New_York
## tzcode source: internal
##
## attached base packages:
## [1] stats graphics grDevices utils datasets methods base
##
## other attached packages:
## [1] lmerTest_3.1-3 lme4_1.1-36 Matrix_1.6-5 sjPlot_2.8.17
## [5] see_0.11.0 report_0.6.1 parameters_0.24.2 performance_0.13.0
## [9] modelbased_0.10.0 insight_1.1.0 effectsize_1.0.0 datawizard_1.0.1
## [13] correlation_0.8.7 bayestestR_0.15.2 easystats_0.7.4 ggfortify_0.4.17
## [17] OpenRepGrid_0.1.17 openxlsx_4.2.8 qacBase_1.0.3 gt_1.0.0
## [21] dendextend_1.19.0 webshot2_0.1.1 ggcorrplot_0.1.4.1 tidygraph_1.3.1
## [25] interactions_1.2.0 paletteer_1.6.0 plotly_4.10.4 RColorBrewer_1.1-3
## [29] viridis_0.6.5 viridisLite_0.4.2 ggdist_3.3.2 patchwork_1.3.0
## [33] ggh4x_0.3.0 ggeasy_0.1.5 corrplot_0.95 GGally_2.2.1
## [37] gghalves_0.1.4 ggstatsplot_0.13.0 ggformula_0.12.0 ggridges_0.5.6
## [41] scales_1.3.0 summarytools_1.1.1 magrittr_2.0.3 lubridate_1.9.4
## [45] forcats_1.0.0 stringr_1.5.1 dplyr_1.1.4 purrr_1.0.4
## [49] readr_2.1.5 tidyr_1.3.1 tibble_3.2.1 ggplot2_3.5.1
## [53] tidyverse_2.0.0 psych_2.4.12 Hmisc_5.2-2
##
## loaded via a namespace (and not attached):
## [1] splines_4.3.2 later_1.4.1 XML_3.99-0.18
## [4] rpart_4.1.24 ggExtra_0.10.1 lifecycle_1.0.4
## [7] Rdpack_2.6.2 tcltk_4.3.2 vroom_1.6.5
## [10] globals_0.16.3 processx_3.8.6 lattice_0.22-6
## [13] MASS_7.3-60.0.1 backports_1.5.0 sass_0.4.9
## [16] rmarkdown_2.29 jquerylib_0.1.4 yaml_2.3.10
## [19] httpuv_1.6.15 zip_2.3.2 minqa_1.2.8
## [22] chromote_0.4.0 abind_1.4-8 multcomp_1.4-28
## [25] nnet_7.3-20 TH.data_1.1-3 sandwich_3.1-1
## [28] pvclust_2.2-0 jtools_2.3.0 labelled_2.14.0
## [31] listenv_0.9.1 parallelly_1.42.0 codetools_0.2-20
## [34] xml2_1.3.7 tidyselect_1.2.1 SuppDists_1.1-9.8
## [37] ggeffects_2.2.1 farver_2.1.2 gmp_0.7-5
## [40] broom.mixed_0.2.9.6 matrixStats_1.5.0 base64enc_0.1-3
## [43] jsonlite_1.9.1 Formula_1.2-5 survival_3.8-3
## [46] emmeans_1.10.7 systemfonts_1.1.0 BWStest_0.2.3
## [49] tools_4.3.2 ragg_1.3.3 pryr_0.1.6
## [52] PMCMRplus_1.9.12 Rcpp_1.0.14 glue_1.8.0
## [55] mnormt_2.1.1 gridExtra_2.3 xfun_0.51
## [58] kSamples_1.2-10 distributional_0.5.0 websocket_1.4.2
## [61] numDeriv_2016.8-1.1 withr_3.0.2 fastmap_1.2.0
## [64] boot_1.3-31 digest_0.6.37 timechange_0.3.0
## [67] R6_2.6.1 mime_0.12 estimability_1.5.1
## [70] textshaping_1.0.0 colorspace_2.1-1 generics_0.1.3
## [73] data.table_1.17.0 httr_1.4.7 htmlwidgets_1.6.4
## [76] ggstats_0.9.0 pkgconfig_2.0.3 gtable_0.3.6
## [79] Rmpfr_1.0-0 statsExpressions_1.6.2 furrr_0.3.1
## [82] htmltools_0.5.8.1 multcompView_0.1-10 reformulas_0.4.0
## [85] knitr_1.49 rstudioapi_0.17.1 tzdb_0.4.0
## [88] reshape2_1.4.4 nloptr_2.2.0 coda_0.19-4.1
## [91] checkmate_2.3.2 nlme_3.1-167 cachem_1.1.0
## [94] zoo_1.8-13 sjlabelled_1.2.0 parallel_4.3.2
## [97] miniUI_0.1.1.1 foreign_0.8-88 pillar_1.10.1
## [100] grid_4.3.2 vctrs_0.6.5 promises_1.3.2
## [103] xtable_1.8-4 cluster_2.1.8.1 htmlTable_2.4.3
## [106] evaluate_1.0.3 zeallot_0.1.0 magick_2.8.5
## [109] mvtnorm_1.3-3 cli_3.6.4 compiler_4.3.2
## [112] rlang_1.1.5 crayon_1.5.3 rstantools_2.4.0
## [115] labeling_0.4.3 rematch2_2.1.2 ps_1.9.0
## [118] sjmisc_2.8.10 plyr_1.8.9 stringi_1.8.4
## [121] pander_0.6.6 munsell_0.5.1 lazyeval_0.2.2
## [124] mosaicCore_0.9.4.0 sjstats_0.19.0 rapportools_1.2
## [127] hms_1.1.3 bit64_4.6.0-1 future_1.34.0
## [130] shiny_1.10.0 haven_2.5.4 rbibutils_2.3
## [133] igraph_2.1.4 broom_1.0.7 memoise_2.0.1
## [136] RcppParallel_5.1.10 bslib_0.9.0 bit_4.6.0